-use std::default::Default;
use std::fmt;
use std::path::{PathBuf, Path};
use semver::Version;
use rustc_serialize::{Encoder, Encodable};
-use core::{Dependency, PackageId, Summary};
+use core::{Dependency, PackageId, PackageIdSpec, Summary};
use core::package_id::Metadata;
use util::{CargoResult, human};
include: Vec<String>,
metadata: ManifestMetadata,
profiles: Profiles,
- publish: bool
+ publish: bool,
+ replace: Vec<(PackageIdSpec, Dependency)>,
}
/// General metadata about a package which is just blindly uploaded to the
links: Option<String>,
metadata: ManifestMetadata,
profiles: Profiles,
- publish: bool) -> Manifest {
+ publish: bool,
+ replace: Vec<(PackageIdSpec, Dependency)>) -> Manifest {
Manifest {
summary: summary,
targets: targets,
metadata: metadata,
profiles: profiles,
publish: publish,
+ replace: replace,
}
}
pub fn warnings(&self) -> &[String] { &self.warnings }
pub fn profiles(&self) -> &Profiles { &self.profiles }
pub fn publish(&self) -> bool { self.publish }
+ pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { &self.replace }
pub fn links(&self) -> Option<&str> {
self.links.as_ref().map(|s| &s[..])
}
PackageRegistry {
sources: SourceMap::new(),
source_ids: HashMap::new(),
- overrides: vec![],
+ overrides: Vec::new(),
config: config,
locked: HashMap::new(),
}
impl<'cfg> Registry for PackageRegistry<'cfg> {
fn query(&mut self, dep: &Dependency) -> CargoResult<Vec<Summary>> {
- let overrides = try!(self.query_overrides(dep));
+ let overrides = try!(self.query_overrides(&dep));
let ret = if overrides.is_empty() {
// Ensure the requested source_id is loaded
try!(self.ensure_loaded(dep.source_id(), Kind::Normal));
- let mut ret = Vec::new();
- for (id, src) in self.sources.sources_mut() {
- if id == dep.source_id() {
- ret.extend(try!(src.query(dep)).into_iter());
- }
+
+ match self.sources.get_mut(dep.source_id()) {
+ Some(src) => try!(src.query(&dep)),
+ None => Vec::new(),
}
- ret
} else {
overrides
};
let mut g = Graph::new();
let mut tmp = HashMap::new();
+ let mut replacements = HashMap::new();
let packages = Vec::new();
let packages = self.package.as_ref().unwrap_or(&packages);
- let root = try!(to_package_id(&self.root.name,
- &self.root.version,
- self.root.source.as_ref(),
- default, &path_deps));
- let ids = try!(packages.iter().map(|p| {
- to_package_id(&p.name, &p.version, p.source.as_ref(),
+ let id2pkgid = |id: &EncodablePackageId| {
+ to_package_id(&id.name, &id.version, id.source.as_ref(),
default, &path_deps)
- }).collect::<CargoResult<Vec<_>>>());
+ };
+ let dep2pkgid = |dep: &EncodableDependency| {
+ to_package_id(&dep.name, &dep.version, dep.source.as_ref(),
+ default, &path_deps)
+ };
+
+ let root = try!(dep2pkgid(&self.root));
+ let ids = try!(packages.iter().map(&dep2pkgid)
+ .collect::<CargoResult<Vec<_>>>());
{
let mut register_pkg = |pkgid: &PackageId| {
{
let mut add_dependencies = |id: &PackageId, pkg: &EncodableDependency|
-> CargoResult<()> {
+ if let Some(ref replace) = pkg.replace {
+ let replace = try!(id2pkgid(replace));
+ let replace_precise = tmp.get(&replace).map(|p| {
+ replace.with_precise(p.clone())
+ }).unwrap_or(replace);
+ replacements.insert(id.clone(), replace_precise);
+ assert!(pkg.dependencies.is_none());
+ return Ok(())
+ }
+
let deps = match pkg.dependencies {
Some(ref deps) => deps,
None => return Ok(()),
};
for edge in deps.iter() {
- let to_depend_on = try!(to_package_id(&edge.name,
- &edge.version,
- edge.source.as_ref(),
- default,
- &path_deps));
+ let to_depend_on = try!(id2pkgid(edge));
let precise_pkgid =
tmp.get(&to_depend_on)
.map(|p| to_depend_on.with_precise(p.clone()))
root: root,
features: HashMap::new(),
metadata: self.metadata.clone(),
+ replacements: replacements,
})
}
}
name: String,
version: String,
source: Option<SourceId>,
- dependencies: Option<Vec<EncodablePackageId>>
+ dependencies: Option<Vec<EncodablePackageId>>,
+ replace: Option<EncodablePackageId>,
}
#[derive(Debug, PartialOrd, Ord, PartialEq, Eq)]
let encodable = ids.iter().filter_map(|&id| {
if self.root == *id { return None; }
- Some(encodable_resolve_node(id, &self.graph))
+ Some(encodable_resolve_node(id, self))
}).collect::<Vec<EncodableDependency>>();
EncodableResolve {
package: Some(encodable),
- root: encodable_resolve_node(&self.root, &self.graph),
+ root: encodable_resolve_node(&self.root, self),
metadata: self.metadata.clone(),
}.encode(s)
}
}
-fn encodable_resolve_node(id: &PackageId, graph: &Graph<PackageId>)
+fn encodable_resolve_node(id: &PackageId, resolve: &Resolve)
-> EncodableDependency {
- let deps = graph.edges(id).map(|edge| {
- let mut deps = edge.map(encodable_package_id).collect::<Vec<_>>();
- deps.sort();
- deps
- });
+ let (replace, deps) = match resolve.replacement(id) {
+ Some(id) => {
+ (Some(encodable_package_id(id)), None)
+ }
+ None => {
+ let mut deps = resolve.graph.edges(id)
+ .into_iter().flat_map(|a| a)
+ .map(encodable_package_id)
+ .collect::<Vec<_>>();
+ deps.sort();
+ (None, Some(deps))
+ }
+ };
let source = if id.source_id().is_path() {
None
version: id.version().to_string(),
source: source,
dependencies: deps,
+ replace: replace,
}
}
//! which is more worried about discovering crates from various sources, this
//! module just uses the Registry trait as a source to learn about crates from.
//!
-//! Actually solving a constraint graph is an NP-hard problem. This algorithm
-//! is basically a nice heuristic to make sure we get roughly the best answer
+//! Actually solving a constraint graph is an NP-hard problem. This algorithm
+//! is basically a nice heuristic to make sure we get roughly the best answer
//! most of the time. The constraints that we're working with are:
//!
//! 1. Each crate can have any number of dependencies. Each dependency can
use core::PackageIdSpec;
use util::{CargoResult, Graph, human, CargoError};
use util::profile;
+use util::ChainError;
use util::graph::{Nodes, Edges};
pub use self::encode::{EncodableResolve, EncodableDependency, EncodablePackageId};
#[derive(PartialEq, Eq, Clone)]
pub struct Resolve {
graph: Graph<PackageId>,
+ replacements: HashMap<PackageId, PackageId>,
features: HashMap<PackageId, HashSet<String>>,
root: PackageId,
metadata: Option<Metadata>,
}
+pub struct Deps<'a> {
+ edges: Option<Edges<'a, PackageId>>,
+ resolve: &'a Resolve,
+}
+
+pub struct DepsNotReplaced<'a> {
+ edges: Option<Edges<'a, PackageId>>,
+}
+
#[derive(Clone, Copy)]
pub enum Method<'a> {
Everything,
// Err(..) == standard transient error (e.g. I/O error)
// Ok(Err(..)) == resolve error, but is human readable
// Ok(Ok(..)) == success in resolving
-type ResolveResult = CargoResult<CargoResult<Box<Context>>>;
+type ResolveResult<'a> = CargoResult<CargoResult<Box<Context<'a>>>>;
// Information about the dependencies for a crate, a tuple of:
//
// (dependency info, candidates, features activated)
-type DepInfo = (Dependency, Vec<Rc<Summary>>, Vec<String>);
+type DepInfo = (Dependency, Vec<Candidate>, Vec<String>);
+
+#[derive(Clone)]
+struct Candidate {
+ summary: Rc<Summary>,
+ replace: Option<Rc<Summary>>,
+}
impl Resolve {
fn new(root: PackageId) -> Resolve {
let mut g = Graph::new();
g.add(root.clone(), &[]);
- Resolve { graph: g, root: root, features: HashMap::new(), metadata: None }
+ Resolve {
+ graph: g,
+ root: root,
+ replacements: HashMap::new(),
+ features: HashMap::new(),
+ metadata: None,
+ }
}
pub fn copy_metadata(&mut self, other: &Resolve) {
pub fn root(&self) -> &PackageId { &self.root }
- pub fn deps(&self, pkg: &PackageId) -> Option<Edges<PackageId>> {
- self.graph.edges(pkg)
+ pub fn deps(&self, pkg: &PackageId) -> Deps {
+ Deps { edges: self.graph.edges(pkg), resolve: self }
+ }
+
+ pub fn deps_not_replaced(&self, pkg: &PackageId) -> DepsNotReplaced {
+ DepsNotReplaced { edges: self.graph.edges(pkg) }
+ }
+
+ pub fn replacement(&self, pkg: &PackageId) -> Option<&PackageId> {
+ self.replacements.get(pkg)
+ }
+
+ pub fn replacements(&self) -> &HashMap<PackageId, PackageId> {
+ &self.replacements
}
pub fn features(&self, pkg: &PackageId) -> Option<&HashSet<String>> {
}
}
+impl<'a> Iterator for Deps<'a> {
+ type Item = &'a PackageId;
+
+ fn next(&mut self) -> Option<&'a PackageId> {
+ self.edges.as_mut()
+ .and_then(|e| e.next())
+ .map(|id| self.resolve.replacement(id).unwrap_or(id))
+ }
+}
+
+impl<'a> Iterator for DepsNotReplaced<'a> {
+ type Item = &'a PackageId;
+
+ fn next(&mut self) -> Option<&'a PackageId> {
+ self.edges.as_mut().and_then(|e| e.next())
+ }
+}
+
#[derive(Clone)]
-struct Context {
+struct Context<'a> {
activations: HashMap<(String, SourceId), Vec<Rc<Summary>>>,
resolve: Resolve,
+ replacements: &'a [(PackageIdSpec, Dependency)],
}
/// Builds the list of all packages required to build the first argument.
-pub fn resolve(summary: &Summary, method: &Method,
+pub fn resolve(summary: &Summary,
+ method: &Method,
+ replacements: &[(PackageIdSpec, Dependency)],
registry: &mut Registry) -> CargoResult<Resolve> {
trace!("resolve; summary={}", summary.package_id());
let summary = Rc::new(summary.clone());
let cx = Context {
resolve: Resolve::new(summary.package_id().clone()),
activations: HashMap::new(),
+ replacements: replacements,
};
let _p = profile::start(format!("resolving: {}", summary.package_id()));
let cx = try!(activate_deps_loop(cx, registry, summary, method));
Ok(cx.resolve)
}
-/// Attempts to activate the summary `parent` in the context `cx`.
+/// Attempts to activate the summary `candidate` in the context `cx`.
///
/// This function will pull dependency summaries from the registry provided, and
/// the dependencies of the package will be determined by the `method` provided.
-/// If `parent` was activated, this function returns the dependency frame to
+/// If `candidate` was activated, this function returns the dependency frame to
/// iterate through next.
fn activate(cx: &mut Context,
registry: &mut Registry,
- parent: Rc<Summary>,
+ parent: Option<&Rc<Summary>>,
+ candidate: Candidate,
method: &Method)
-> CargoResult<Option<DepsFrame>> {
- // Dependency graphs are required to be a DAG, so we keep a set of
- // packages we're visiting and bail if we hit a dupe.
- let id = parent.package_id().clone();
+ if let Some(parent) = parent {
+ cx.resolve.graph.link(parent.package_id().clone(),
+ candidate.summary.package_id().clone());
+ }
- // If we're already activated, then that was easy!
- if cx.flag_activated(&parent, method) {
+ if cx.flag_activated(&candidate.summary, method) {
return Ok(None);
}
- trace!("activating {}", parent.package_id());
- let deps = try!(cx.build_deps(registry, &parent, method));
+ let candidate = match candidate.replace {
+ Some(replace) => {
+ cx.resolve.replacements.insert(candidate.summary.package_id().clone(),
+ replace.package_id().clone());
+ if cx.flag_activated(&replace, method) {
+ return Ok(None);
+ }
+ trace!("activating {} (replacing {})", replace.package_id(),
+ candidate.summary.package_id());
+ replace
+ }
+ None => {
+ trace!("activating {}", candidate.summary.package_id());
+ candidate.summary
+ }
+ };
+
+ let deps = try!(cx.build_deps(registry, &candidate, method));
- Ok(Some(DepsFrame{
- parent: parent,
+ Ok(Some(DepsFrame {
+ parent: candidate,
remaining_siblings: RcVecIter::new(deps),
- id: id,
}))
}
struct DepsFrame {
parent: Rc<Summary>,
remaining_siblings: RcVecIter<DepInfo>,
- id: PackageId,
}
impl DepsFrame {
}
}
-struct BacktrackFrame {
- context_backup: Context,
+struct BacktrackFrame<'a> {
+ context_backup: Context<'a>,
deps_backup: BinaryHeap<DepsFrame>,
- remaining_candidates: RcVecIter<Rc<Summary>>,
+ remaining_candidates: RcVecIter<Candidate>,
parent: Rc<Summary>,
dep: Dependency,
features: Vec<String>,
///
/// If all dependencies can be activated and resolved to a version in the
/// dependency graph, cx.resolve is returned.
-fn activate_deps_loop(mut cx: Context,
- registry: &mut Registry,
- top: Rc<Summary>,
- top_method: &Method) -> CargoResult<Context> {
+fn activate_deps_loop<'a>(mut cx: Context<'a>,
+ registry: &mut Registry,
+ top: Rc<Summary>,
+ top_method: &Method) -> CargoResult<Context<'a>> {
// Note that a `BinaryHeap` is used for the remaining dependencies that need
// activation. This heap is sorted such that the "largest value" is the most
// constrained dependency, or the one with the least candidates.
// use (those with more candidates).
let mut backtrack_stack = Vec::new();
let mut remaining_deps = BinaryHeap::new();
- remaining_deps.extend(try!(activate(&mut cx, registry, top, &top_method)));
+ remaining_deps.extend(try!(activate(&mut cx, registry, None,
+ Candidate { summary: top, replace: None },
+ &top_method)));
// Main resolution loop, this is the workhorse of the resolution algorithm.
//
// the left-most nonzero digit is the same they're considered
// compatible.
candidates.iter().filter(|&b| {
- prev_active.iter().any(|a| a == b) ||
+ prev_active.iter().any(|a| *a == b.summary) ||
prev_active.iter().all(|a| {
- !compatible(a.version(), b.version())
+ !compatible(a.version(), b.summary.version())
})
}).cloned().collect()
};
uses_default_features: dep.uses_default_features(),
};
trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(),
- candidate.version());
- cx.resolve.graph.link(parent.package_id().clone(),
- candidate.package_id().clone());
- remaining_deps.extend(try!(activate(&mut cx, registry,
+ candidate.summary.version());
+ remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent),
candidate, &method)));
}
trace!("resolved: {:?}", cx.resolve);
// Searches up `backtrack_stack` until it finds a dependency with remaining
// candidates. Resets `cx` and `remaining_deps` to that level and returns the
// next candidate. If all candidates have been exhausted, returns None.
-fn find_candidate(backtrack_stack: &mut Vec<BacktrackFrame>,
- cx: &mut Context,
- remaining_deps: &mut BinaryHeap<DepsFrame>,
- parent: &mut Rc<Summary>,
- cur: &mut usize,
- dep: &mut Dependency,
- features: &mut Vec<String>) -> Option<Rc<Summary>> {
+fn find_candidate<'a>(backtrack_stack: &mut Vec<BacktrackFrame<'a>>,
+ cx: &mut Context<'a>,
+ remaining_deps: &mut BinaryHeap<DepsFrame>,
+ parent: &mut Rc<Summary>,
+ cur: &mut usize,
+ dep: &mut Dependency,
+ features: &mut Vec<String>) -> Option<Candidate> {
while let Some(mut frame) = backtrack_stack.pop() {
if let Some((_, candidate)) = frame.remaining_candidates.next() {
*cx = frame.context_backup.clone();
parent: &Summary,
dep: &Dependency,
prev_active: &[Rc<Summary>],
- candidates: &[Rc<Summary>]) -> Box<CargoError> {
+ candidates: &[Candidate]) -> Box<CargoError> {
if candidates.len() > 0 {
let mut msg = format!("failed to select a version for `{}` \
(required by `{}`):\n\
msg.push_str(&format!("\n possible versions to select: {}",
candidates.iter()
- .map(|v| v.version())
+ .map(|v| v.summary.version())
.map(|v| v.to_string())
.collect::<Vec<_>>()
.join(", ")));
}
}
-impl Context {
+impl<'a> Context<'a> {
// Activate this summary by inserting it into our list of known activations.
//
// Returns if this summary with the given method is already activated.
}
}
- fn build_deps(&mut self, registry: &mut Registry,
- parent: &Summary,
+ fn build_deps(&mut self,
+ registry: &mut Registry,
+ candidate: &Summary,
method: &Method) -> CargoResult<Vec<DepInfo>> {
// First, figure out our set of dependencies based on the requested set
// of features. This also calculates what features we're going to enable
// for our own dependencies.
- let deps = try!(self.resolve_features(parent, method));
+ let deps = try!(self.resolve_features(candidate, method));
// Next, transform all dependencies into a list of possible candidates
// which can satisfy that dependency.
let mut deps = try!(deps.into_iter().map(|(dep, features)| {
- let mut candidates = try!(registry.query(&dep));
+ let mut candidates = try!(self.query(registry, &dep));
// When we attempt versions for a package, we'll want to start at
// the maximum version and work our way down.
candidates.sort_by(|a, b| {
- b.version().cmp(a.version())
+ b.summary.version().cmp(a.summary.version())
});
- let candidates = candidates.into_iter().map(Rc::new).collect();
Ok((dep, candidates, features))
}).collect::<CargoResult<Vec<DepInfo>>>());
// dependencies with more candidates. This way if the dependency with
// only one candidate can't be resolved we don't have to do a bunch of
// work before we figure that out.
- deps.sort_by(|&(_, ref a, _), &(_, ref b, _)| {
- a.len().cmp(&b.len())
- });
+ deps.sort_by_key(|&(_, ref a, _)| a.len());
Ok(deps)
}
+ /// Queries the `registry` to return a list of candidates for `dep`.
+ ///
+ /// This method is the location where overrides are taken into account. If
+ /// any candidates are returned which match an override then the override is
+ /// applied by performing a second query for what the override should
+ /// return.
+ fn query(&self,
+ registry: &mut Registry,
+ dep: &Dependency) -> CargoResult<Vec<Candidate>> {
+ let summaries = try!(registry.query(dep));
+ summaries.into_iter().map(Rc::new).map(|summary| {
+ let mut replace = None;
+ let mut matched_spec = None;
+ for &(ref spec, ref dep) in self.replacements.iter() {
+ if !spec.matches(summary.package_id()) {
+ continue
+ }
+
+ if replace.is_some() {
+ bail!("overlapping replacement specifications found:\n\n \
+ * {}\n * {}\n\nboth specifications match: {}",
+ matched_spec.unwrap(), spec, summary.package_id());
+ }
+
+ let mut summaries = try!(registry.query(dep)).into_iter();
+ let s = try!(summaries.next().chain_error(|| {
+ human(format!("no matching package for override `{}` found\n\
+ location searched: {}\n\
+ version required: {}",
+ spec, dep.source_id(), dep.version_req()))
+ }));
+ let summaries = summaries.collect::<Vec<_>>();
+ if summaries.len() > 0 {
+ let bullets = summaries.iter().map(|s| {
+ format!(" * {}", s.package_id())
+ }).collect::<Vec<_>>();
+ bail!("the replacement specification `{}` matched \
+ multiple packages:\n * {}\n{}", spec,
+ s.package_id(), bullets.join("\n"));
+ }
+
+ // The dependency should be hard-coded to have the same name and
+ // an exact version requirement, so both of these assertions
+ // should never fail.
+ assert_eq!(s.version(), summary.version());
+ assert_eq!(s.name(), summary.name());
+
+ replace = Some(Rc::new(s));
+ matched_spec = Some(spec.clone());
+ }
+ Ok(Candidate { summary: summary, replace: replace })
+ }).collect()
+ }
+
fn prev_active(&self, dep: &Dependency) -> &[Rc<Summary>] {
let key = (dep.name().to_string(), dep.source_id().clone());
self.activations.get(&key).map(|v| &v[..]).unwrap_or(&[])
}
- fn resolve_features(&mut self, parent: &Summary, method: &Method)
- -> CargoResult<Vec<(Dependency, Vec<String>)>> {
+ fn resolve_features(&mut self, candidate: &Summary, method: &Method)
+ -> CargoResult<Vec<(Dependency, Vec<String>)>> {
let dev_deps = match *method {
Method::Everything => true,
Method::Required { dev_deps, .. } => dev_deps,
};
// First, filter by dev-dependencies
- let deps = parent.dependencies();
+ let deps = candidate.dependencies();
let deps = deps.iter().filter(|d| d.is_transitive() || dev_deps);
- let (mut feature_deps, used_features) = try!(build_features(parent,
+ let (mut feature_deps, used_features) = try!(build_features(candidate,
method));
let mut ret = Vec::new();
if !unknown.is_empty() {
let features = unknown.join(", ");
bail!("Package `{}` does not have these features: `{}`",
- parent.package_id(), features)
+ candidate.package_id(), features)
}
}
// Record what list of features is active for this package.
if !used_features.is_empty() {
- let pkgid = parent.package_id();
+ let pkgid = candidate.package_id();
self.resolve.features.entry(pkgid.clone())
.or_insert(HashSet::new())
.extend(used_features);
// dependencies.
if checked.insert(id) {
let summary = summaries[id];
- for dep in resolve.deps(id).into_iter().flat_map(|a| a) {
+ for dep in resolve.deps(id) {
let is_transitive = summary.dependencies().iter().any(|d| {
d.matches_id(dep) && d.is_transitive()
});
return
}
set.insert(dep);
- if let Some(deps) = resolve.deps(dep) {
- for dep in deps {
- fill_with_deps(resolve, dep, set, visited);
- }
+ for dep in resolve.deps(dep) {
+ fill_with_deps(resolve, dep, set, visited);
}
}
nodes: resolve.iter().map(|id| {
Node {
id: id,
- dependencies: resolve.deps(id)
- .map(|it| it.collect())
- .unwrap_or(Vec::new()),
+ dependencies: resolve.deps(id).collect(),
}
}).collect(),
};
}
let id = unit.pkg.package_id();
- let deps = self.resolve.deps(id).into_iter().flat_map(|a| a);
+ let deps = self.resolve.deps(id);
let mut ret = try!(deps.filter(|dep| {
unit.pkg.dependencies().iter().filter(|d| {
d.name() == dep.name()
/// Returns the dependencies necessary to document a package
fn doc_deps(&self, unit: &Unit<'a>) -> CargoResult<Vec<Unit<'a>>> {
- let deps = self.resolve.deps(unit.pkg.package_id()).into_iter();
- let deps = deps.flat_map(|a| a).filter(|dep| {
+ let deps = self.resolve.deps(unit.pkg.package_id()).filter(|dep| {
unit.pkg.dependencies().iter().filter(|d| {
d.name() == dep.name()
}).any(|dep| {
out.push_str("]\n");
}
out.push_str("\n");
+ } else if dep.contains_key("replace") {
+ out.push_str(&format!("replace = {}\n\n", lookup(dep, "replace")));
}
}
previous: Option<&'a Resolve>,
to_avoid: Option<&HashSet<&'a PackageId>>)
-> CargoResult<Resolve> {
-
try!(registry.add_sources(&[package.package_id().source_id()
.clone()]));
// TODO: This seems like a hokey reason to single out the registry as being
// different
let mut to_avoid_sources = HashSet::new();
- match to_avoid {
- Some(set) => {
- for package_id in set.iter() {
- let source = package_id.source_id();
- if !source.is_registry() {
- to_avoid_sources.insert(source);
- }
- }
- }
- None => {}
+ if let Some(to_avoid) = to_avoid {
+ to_avoid_sources.extend(to_avoid.iter()
+ .map(|p| p.source_id())
+ .filter(|s| !s.is_registry()));
}
let summary = package.summary().clone();
- let summary = match previous {
+ let (summary, replace) = match previous {
Some(r) => {
// In the case where a previous instance of resolve is available, we
// want to lock as many packages as possible to the previous version
// to the previously resolved version if the dependency listed
// still matches the locked version.
for node in r.iter().filter(|p| keep(p, to_avoid, &to_avoid_sources)) {
- let deps = r.deps(node).into_iter().flat_map(|i| i)
+ let deps = r.deps_not_replaced(node)
.filter(|p| keep(p, to_avoid, &to_avoid_sources))
.cloned().collect();
registry.register_lock(node.clone(), deps);
}
- let map = r.deps(r.root()).into_iter().flat_map(|i| i).filter(|p| {
- keep(p, to_avoid, &to_avoid_sources)
- }).map(|d| {
- (d.name(), d)
- }).collect::<HashMap<_, _>>();
- summary.map_dependencies(|d| {
- match map.get(d.name()) {
- Some(&lock) if d.matches_id(lock) => d.lock_to(lock),
- _ => d,
+ let summary = {
+ let map = r.deps_not_replaced(r.root()).filter(|p| {
+ keep(p, to_avoid, &to_avoid_sources)
+ }).map(|d| {
+ (d.name(), d)
+ }).collect::<HashMap<_, _>>();
+
+ summary.map_dependencies(|dep| {
+ match map.get(dep.name()) {
+ Some(&lock) if dep.matches_id(lock) => dep.lock_to(lock),
+ _ => dep,
+ }
+ })
+ };
+ let replace = package.manifest().replace();
+ let replace = replace.iter().map(|&(ref spec, ref dep)| {
+ for (key, val) in r.replacements().iter() {
+ if spec.matches(key) && dep.matches_id(val) {
+ return (spec.clone(), dep.clone().lock_to(val))
+ }
}
- })
+ (spec.clone(), dep.clone())
+ }).collect::<Vec<_>>();
+ (summary, replace)
}
- None => summary,
+ None => (summary, package.manifest().replace().to_owned()),
};
- let mut resolved = try!(resolver::resolve(&summary, &method, registry));
- match previous {
- Some(r) => resolved.copy_metadata(r),
- None => {}
+ let mut resolved = try!(resolver::resolve(&summary, &method, &replace,
+ registry));
+ if let Some(previous) = previous {
+ resolved.copy_metadata(previous);
}
return Ok(resolved);
use std::str;
use toml;
-use semver;
+use semver::{self, VersionReq};
use rustc_serialize::{Decodable, Decoder};
-use core::{SourceId, Profiles};
+use core::{SourceId, Profiles, PackageIdSpec};
use core::{Summary, Manifest, Target, Dependency, DependencyInner, PackageId,
GitReference};
use core::dependency::{Kind, Platform};
build_dependencies: Option<HashMap<String, TomlDependency>>,
features: Option<HashMap<String, Vec<String>>>,
target: Option<HashMap<String, TomlPlatform>>,
+ replace: Option<HashMap<String, TomlDependency>>,
}
#[derive(RustcDecodable, Clone, Default)]
}
let mut deps = Vec::new();
+ let mut replace = Vec::new();
{
Some(Kind::Development)));
}
}
+
+ if let Some(ref map) = self.replace {
+ for (spec, replacement) in map {
+ let spec = try!(PackageIdSpec::parse(spec));
+
+ let version_specified = match *replacement {
+ TomlDependency::Detailed(ref d) => d.version.is_some(),
+ TomlDependency::Simple(..) => true,
+ };
+ if version_specified {
+ bail!("replacements cannot specify a version \
+ requirement, but found one for `{}`", spec);
+ }
+
+ let dep = try!(replacement.to_dependency(spec.name(),
+ &mut cx,
+ None));
+ let dep = {
+ let version = try!(spec.version().chain_error(|| {
+ human(format!("replacements must specify a version \
+ to replace, but `{}` does not",
+ spec))
+ }));
+ let req = VersionReq::exact(version);
+ dep.clone_inner().set_version_req(req)
+ .into_dependency()
+ };
+ replace.push((spec, dep));
+ }
+ }
}
let exclude = project.exclude.clone().unwrap_or(Vec::new());
project.links.clone(),
metadata,
profiles,
- publish);
+ publish,
+ replace);
if project.license_file.is_some() && project.license.is_some() {
manifest.add_warning(format!("only one of `license` or \
`license-file` is necessary"));
}
}
-fn process_dependencies(cx: &mut Context,
- new_deps: Option<&HashMap<String, TomlDependency>>,
- kind: Option<Kind>)
- -> CargoResult<()> {
- let dependencies = match new_deps {
- Some(ref dependencies) => dependencies,
- None => return Ok(())
- };
- for (n, v) in dependencies.iter() {
- let details = match *v {
+impl TomlDependency {
+ fn to_dependency(&self,
+ name: &str,
+ cx: &mut Context,
+ kind: Option<Kind>)
+ -> CargoResult<Dependency> {
+ let details = match *self {
TomlDependency::Simple(ref version) => {
let mut d: DetailedTomlDependency = Default::default();
d.version = Some(version.clone());
if details.version.is_none() && details.path.is_none() &&
details.git.is_none() {
- cx.warnings.push(format!("dependency ({}) specified \
- without providing a local path, Git \
- repository, or version to use. This will \
- be considered an error in future \
- versions", n));
+ let msg = format!("dependency ({}) specified without \
+ providing a local path, Git repository, or \
+ version to use. This will be considered an \
+ error in future versions", name);
+ cx.warnings.push(msg);
}
let reference = details.branch.clone().map(GitReference::Branch)
}.unwrap_or(try!(SourceId::for_central(cx.config)));
let version = details.version.as_ref().map(|v| &v[..]);
- let mut dep = try!(DependencyInner::parse(&n, version, &new_source_id));
+ let mut dep = try!(DependencyInner::parse(name, version, &new_source_id));
dep = dep.set_features(details.features.unwrap_or(Vec::new()))
.set_default_features(details.default_features.unwrap_or(true))
.set_optional(details.optional.unwrap_or(false))
if let Some(kind) = kind {
dep = dep.set_kind(kind);
}
- cx.deps.push(dep.into_dependency());
+ Ok(dep.into_dependency())
+ }
+}
+
+fn process_dependencies(cx: &mut Context,
+ new_deps: Option<&HashMap<String, TomlDependency>>,
+ kind: Option<Kind>)
+ -> CargoResult<()> {
+ let dependencies = match new_deps {
+ Some(ref dependencies) => dependencies,
+ None => return Ok(())
+ };
+ for (n, v) in dependencies.iter() {
+ let dep = try!(v.to_dependency(n, cx, kind));
+ cx.deps.push(dep);
}
Ok(())
-> CargoResult<Vec<PackageId>> {
let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
let method = Method::Everything;
- Ok(try!(resolver::resolve(&summary, &method, registry)).iter().map(|p| {
+ Ok(try!(resolver::resolve(&summary, &method, &[], registry)).iter().map(|p| {
p.clone()
}).collect())
}
--- /dev/null
+use hamcrest::assert_that;
+
+use support::registry::{registry, Package};
+use support::{execs, project, UPDATING, DOWNLOADING, COMPILING};
+use support::git;
+use support::paths;
+
+// No-op per-test setup; the suite's `test!` macro appears to expect a
+// `setup` hook to exist (see the support macros) — nothing is shared here.
+fn setup() {}
+
+test!(override_simple {
+    // Basic `[replace]`: a registry dependency on `foo 0.1.0` is redirected
+    // to a git checkout. The expected output shows `foo` compiling from a
+    // `file://` (git) location rather than the registry.
+    Package::new("foo", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+
+            [replace]
+            "foo:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "
+            extern crate foo;
+            pub fn bar() {
+                foo::foo();
+            }
+        ");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+{updating} git repository `[..]`
+{compiling} foo v0.1.0 (file://[..])
+{compiling} local v0.0.1 (file://[..])
+",
+        updating = UPDATING, compiling = COMPILING)));
+});
+
+test!(missing_version {
+    // A `[replace]` key must name an exact version (`name:version`); a bare
+    // package name is rejected while the manifest is parsed.
+    let p = project("local")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+
+            [replace]
+            foo = { git = 'https://example.com' }
+        "#)
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(101).with_stderr("\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements must specify a version to replace, but `foo` does not
+"));
+});
+
+test!(different_version {
+    // The replacement *value* may not carry a version requirement: replacing
+    // `foo:0.1.0` with the bare registry string "0.2.0" is a manifest error.
+    Package::new("foo", "0.2.0").publish();
+    Package::new("foo", "0.1.0").publish();
+
+    let p = project("local")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+
+            [replace]
+            "foo:0.1.0" = "0.2.0"
+        "#)
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(101).with_stderr("\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements cannot specify a version requirement, but found one for [..]
+"));
+});
+
+test!(transitive {
+    // `[replace]` applies to transitive dependencies too: `foo` is reached
+    // only through `bar`, yet the git override supplies it (note `foo`
+    // compiling from `file://`). A second build must be a no-op.
+    Package::new("foo", "0.1.0").publish();
+    Package::new("bar", "0.2.0")
+        .dep("foo", "0.1.0")
+        .file("src/lib.rs", "extern crate foo; fn bar() { foo::foo(); }")
+        .publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.2.0"
+
+            [replace]
+            "foo:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+{updating} git repository `[..]`
+{downloading} bar v0.2.0 (registry [..])
+{compiling} foo v0.1.0 (file://[..])
+{compiling} bar v0.2.0 (registry [..])
+{compiling} local v0.0.1 (file://[..])
+",
+        updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING)));
+
+    assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
+});
+
+test!(persists_across_rebuilds {
+    // The replacement choice must be recorded (lock file), so an immediate
+    // second `cargo build` produces no output — nothing is re-resolved or
+    // recompiled.
+    Package::new("foo", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+
+            [replace]
+            "foo:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "
+            extern crate foo;
+            pub fn bar() {
+                foo::foo();
+            }
+        ");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+{updating} git repository `file://[..]`
+{compiling} foo v0.1.0 (file://[..])
+{compiling} local v0.0.1 (file://[..])
+",
+        updating = UPDATING, compiling = COMPILING)));
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0).with_stdout(""));
+});
+
+test!(replace_registry_with_path {
+    // A replacement may point at a local `path` source instead of a git
+    // repository; the registry copy of `foo` is never downloaded.
+    Package::new("foo", "0.1.0").publish();
+
+    project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}")
+        .build();
+
+    let p = project("local")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+
+            [replace]
+            "foo:0.1.0" = { path = "../foo" }
+        "#)
+        .file("src/lib.rs", "
+            extern crate foo;
+            pub fn bar() {
+                foo::foo();
+            }
+        ");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+{compiling} foo v0.1.0 (file://[..])
+{compiling} local v0.0.1 (file://[..])
+",
+        compiling = COMPILING, updating = UPDATING)));
+});
+
+test!(use_a_spec_to_select {
+    // The spec `foo:0.2.0` selects only that version for replacement:
+    // `local`'s direct dep on foo 0.1.x still comes from the registry
+    // (provides `foo1`), while `bar`'s dep on foo 0.2 is satisfied by the
+    // git override (which provides `foo3`, matching bar's source).
+    Package::new("foo", "0.1.1")
+        .file("src/lib.rs", "pub fn foo1() {}")
+        .publish();
+    Package::new("foo", "0.2.0").publish();
+    Package::new("bar", "0.1.1")
+        .dep("foo", "0.2")
+        .file("src/lib.rs", "
+            extern crate foo;
+            pub fn bar() { foo::foo3(); }
+        ")
+        .publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.2.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo3() {}");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+            foo = "0.1"
+
+            [replace]
+            "foo:0.2.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "
+            extern crate foo;
+            extern crate bar;
+
+            fn local() {
+                foo::foo1();
+                bar::bar();
+            }
+        ");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+{updating} git repository `[..]`
+{downloading} [..]
+{downloading} [..]
+{compiling} [..]
+{compiling} [..]
+{compiling} [..]
+{compiling} local v0.0.1 (file://[..])
+",
+        updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING)));
+});
+
+test!(override_adds_some_deps {
+    // The git override of `bar` introduces its own dependency on `foo`,
+    // which must still be resolved and downloaded from the registry. The
+    // trailing `cargo update -p <url>#bar` invocations check that an update
+    // scoped by source URL only touches that one source (and, per the
+    // final no-op build, does not re-lock foo onto the newly published
+    // 0.1.2).
+    Package::new("foo", "0.1.1").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "0.1"
+        "#)
+        .file("src/lib.rs", "");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            bar = "0.1"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+{updating} git repository `[..]`
+{downloading} foo v0.1.1 (registry [..])
+{compiling} foo v0.1.1 (registry [..])
+{compiling} bar v0.1.0 ([..])
+{compiling} local v0.0.1 (file://[..])
+",
+        updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING)));
+
+    assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
+
+    Package::new("foo", "0.1.2").publish();
+    assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", foo.url())),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} git repository `file://[..]`
+", updating = UPDATING)));
+    assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", registry())),
+                execs().with_status(0).with_stdout(&format!("\
+{updating} registry `file://[..]`
+", updating = UPDATING)));
+
+    assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
+});
+
+test!(locked_means_locked_yes_no_seriously_i_mean_locked {
+    // Regression-ish test for #2041: with the override's wildcard `foo = "*"`
+    // and two published foo versions, repeated builds after the first must
+    // produce no output at all — i.e. the lock file fully pins the earlier
+    // resolution instead of flip-flopping between foo versions.
+    Package::new("foo", "0.1.0").publish();
+    Package::new("foo", "0.2.0").publish();
+    Package::new("bar", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+
+            [dependencies]
+            foo = "*"
+        "#)
+        .file("src/lib.rs", "");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1"
+            bar = "0.1"
+
+            [replace]
+            "bar:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0));
+
+    assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
+    assert_that(p.cargo("build"), execs().with_status(0).with_stdout(""));
+});
+
+test!(override_wrong_name {
+    // The override source only contains a package named `bar`, so the
+    // `foo:0.1.0` replacement spec matches nothing in it and resolution
+    // fails with a "no matching package" error naming the searched source.
+    Package::new("foo", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1"
+
+            [replace]
+            "foo:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(101).with_stderr("\
+error: no matching package for override `foo:0.1.0` found
+location searched: file://[..]
+version required: = 0.1.0
+"));
+});
+
+test!(override_with_nothing {
+    // The override git repository has no Cargo.toml at all; updating that
+    // source fails with a clear "Could not find Cargo.toml" cause rather
+    // than a crash or a silent fallback to the registry copy.
+    Package::new("foo", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("src/lib.rs", "");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1"
+
+            [replace]
+            "foo:0.1.0" = {{ git = '{}' }}
+        "#, foo.url()))
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(101).with_stderr("\
+error: Unable to update file://[..]
+
+Caused by:
+  Could not find Cargo.toml in `[..]`
+"));
+});
+
+test!(override_wrong_version {
+    // An explicit `version` key inside the replacement value is rejected at
+    // manifest-parse time — the replaced version is already pinned by the
+    // `foo:0.1.0` spec, so a second requirement is ambiguous.
+    let p = project("local")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [replace]
+            "foo:0.1.0" = { git = 'https://example.com', version = '0.2.0' }
+        "#)
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(101).with_stderr("\
+error: failed to parse manifest at `[..]`
+
+Caused by:
+  replacements cannot specify a version requirement, but found one for `foo:0.1.0`
+"));
+});
+
+test!(multiple_specs {
+    // Two `[replace]` specs — the short `foo:0.1.0` form and the
+    // URL-qualified `<registry>#foo:0.1.0` form — match the same package,
+    // which must be reported as overlapping replacement specifications.
+    Package::new("foo", "0.1.0").publish();
+
+    let foo = git::repo(&paths::root().join("override"))
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/lib.rs", "pub fn foo() {}");
+    foo.build();
+
+    let p = project("local")
+        .file("Cargo.toml", &format!(r#"
+            [package]
+            name = "local"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            foo = "0.1.0"
+
+            [replace]
+            "foo:0.1.0" = {{ git = '{0}' }}
+            "{1}#foo:0.1.0" = {{ git = '{0}' }}
+        "#, foo.url(), registry()))
+        .file("src/lib.rs", "");
+
+    assert_that(p.cargo_process("build"),
+                execs().with_status(101).with_stderr("\
+error: overlapping replacement specifications found:
+
+  * [..]
+  * [..]
+
+both specifications match: foo v0.1.0 ([..])
+"));
+});
mod test_cargo_new;
+mod test_cargo_overrides;
mod test_cargo_package;
mod test_cargo_profiles;
mod test_cargo_publish;
mod test_cargo_read_manifest;
mod test_cargo_registry;